- # Source Generated with Decompyle++
- # File: in.pyc (Python 2.6)
-
- """
- The io module provides the Python interfaces to stream handling. The
- builtin open function is defined in this module.
-
- At the top of the I/O hierarchy is the abstract base class IOBase. It
- defines the basic interface to a stream. Note, however, that there is no
- separation between reading and writing to streams; implementations are
- allowed to throw an IOError if they do not support a given operation.
-
- Extending IOBase is RawIOBase which deals simply with the reading and
- writing of raw bytes to a stream. FileIO subclasses RawIOBase to provide
- an interface to OS files.
-
- BufferedIOBase deals with buffering on a raw byte stream (RawIOBase). Its
- subclasses, BufferedWriter, BufferedReader, and BufferedRWPair buffer
- streams that are readable, writable, and both respectively.
- BufferedRandom provides a buffered interface to random access
- streams. BytesIO is a simple stream of in-memory bytes.
-
- Another IOBase subclass, TextIOBase, deals with the encoding and decoding
- of streams into text. TextIOWrapper, which extends it, is a buffered text
- interface to a buffered raw stream (`BufferedIOBase`). Finally, StringIO
- is an in-memory stream for text.
-
- Argument names are not part of the specification, and only the arguments
- of open() are intended to be used as keyword arguments.
-
- data:
-
- DEFAULT_BUFFER_SIZE
-
- An int containing the default buffer size used by the module's buffered
- I/O classes. open() uses the file's blksize (as obtained by os.stat) if
- possible.
- """
- from __future__ import print_function
- from __future__ import unicode_literals
- __author__ = 'Guido van Rossum <guido@python.org>, Mike Verdone <mike.verdone@gmail.com>, Mark Russell <mark.russell@zen.co.uk>'
- __all__ = [
- 'BlockingIOError',
- 'open',
- 'IOBase',
- 'RawIOBase',
- 'FileIO',
- 'BytesIO',
- 'StringIO',
- 'BufferedIOBase',
- 'BufferedReader',
- 'BufferedWriter',
- 'BufferedRWPair',
- 'BufferedRandom',
- 'TextIOBase',
- 'TextIOWrapper']
- import os
- import abc
- import codecs
- import _fileio
- import threading
- DEFAULT_BUFFER_SIZE = 8 * 1024
- __metaclass__ = type
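-
- # Editor's illustrative sketch (not part of the original module): wiring up by
- # hand the layering described in the module docstring -- FileIO (raw bytes),
- # BufferedReader (buffering), TextIOWrapper (text decoding).  The file name is
- # hypothetical; open() normally builds this stack for you.
- def _example_layering(path='example.txt'):
-     raw = FileIO(path, 'r')                   # RawIOBase level: OS file
-     buffered = BufferedReader(raw)            # BufferedIOBase level: buffering
-     text = TextIOWrapper(buffered, 'utf-8')   # TextIOBase level: decoding
-     try:
-         return text.read()
-     finally:
-         text.close()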
-
- class BlockingIOError(IOError):
- '''Exception raised when I/O would block on a non-blocking I/O stream.'''
-
- def __init__(self, errno, strerror, characters_written = 0):
- IOError.__init__(self, errno, strerror)
- self.characters_written = characters_written
-
-
-
- def open(file, mode = 'r', buffering = None, encoding = None, errors = None, newline = None, closefd = True):
- """Open file and return a stream. If the file cannot be opened, an IOError is
- raised.
-
- file is either a string giving the name (and the path if the file
- isn't in the current working directory) of the file to be opened or an
- integer file descriptor of the file to be wrapped. (If a file
- descriptor is given, it is closed when the returned I/O object is
- closed, unless closefd is set to False.)
-
- mode is an optional string that specifies the mode in which the file
- is opened. It defaults to 'r' which means open for reading in text
- mode. Other common values are 'w' for writing (truncating the file if
- it already exists), and 'a' for appending (which on some Unix systems,
- means that all writes append to the end of the file regardless of the
- current seek position). In text mode, if encoding is not specified the
- encoding used is platform dependent. (For reading and writing raw
- bytes use binary mode and leave encoding unspecified.) The available
- modes are:
-
- ========= ===============================================================
- Character Meaning
- --------- ---------------------------------------------------------------
- 'r' open for reading (default)
- 'w' open for writing, truncating the file first
- 'a' open for writing, appending to the end of the file if it exists
- 'b' binary mode
- 't' text mode (default)
- '+' open a disk file for updating (reading and writing)
- 'U' universal newline mode (for backwards compatibility; unneeded
- for new code)
- ========= ===============================================================
-
- The default mode is 'rt' (open for reading text). For binary random
- access, the mode 'w+b' opens and truncates the file to 0 bytes, while
- 'r+b' opens the file without truncation.
-
- Python distinguishes between files opened in binary and text modes,
- even when the underlying operating system doesn't. Files opened in
- binary mode (appending 'b' to the mode argument) return contents as
- bytes objects without any decoding. In text mode (the default, or when
- 't' is appended to the mode argument), the contents of the file are
- returned as strings, the bytes having been first decoded using a
- platform-dependent encoding or using the specified encoding if given.
-
- buffering is an optional integer used to set the buffering policy. By
- default full buffering is on. Pass 0 to switch buffering off (only
- allowed in binary mode), 1 to set line buffering, and an integer > 1
- for full buffering.
-
- encoding is the name of the encoding used to decode or encode the
- file. This should only be used in text mode. The default encoding is
- platform dependent, but any encoding supported by Python can be
- passed. See the codecs module for the list of supported encodings.
-
- errors is an optional string that specifies how encoding errors are to
- be handled---this argument should not be used in binary mode. Pass
- 'strict' to raise a ValueError exception if there is an encoding error
- (the default of None has the same effect), or pass 'ignore' to ignore
- errors. (Note that ignoring encoding errors can lead to data loss.)
- See the documentation for codecs.register for a list of the permitted
- encoding error strings.
-
- newline controls how universal newlines works (it only applies to text
- mode). It can be None, '', '\\n', '\\r', and '\\r\\n'. It works as
- follows:
-
- * On input, if newline is None, universal newlines mode is
- enabled. Lines in the input can end in '\\n', '\\r', or '\\r\\n', and
- these are translated into '\\n' before being returned to the
- caller. If it is '', universal newline mode is enabled, but line
- endings are returned to the caller untranslated. If it has any of
- the other legal values, input lines are only terminated by the given
- string, and the line ending is returned to the caller untranslated.
-
- * On output, if newline is None, any '\\n' characters written are
- translated to the system default line separator, os.linesep. If
- newline is '', no translation takes place. If newline is any of the
- other legal values, any '\\n' characters written are translated to
- the given string.
-
- If closefd is False, the underlying file descriptor will be kept open
- when the file is closed. This does not work when a file name is given
- and must be True in that case.
-
- open() returns a file object whose type depends on the mode, and
- through which the standard file operations such as reading and writing
- are performed. When open() is used to open a file in a text mode ('w',
- 'r', 'wt', 'rt', etc.), it returns a TextIOWrapper. When used to open
- a file in a binary mode, the returned class varies: in read binary
- mode, it returns a BufferedReader; in write binary and append binary
- modes, it returns a BufferedWriter, and in read/write mode, it returns
- a BufferedRandom.
-
- It is also possible to use a string or bytearray as a file for both
- reading and writing. For strings StringIO can be used like a file
- opened in a text mode, and for bytes a BytesIO can be used like a file
- opened in a binary mode.
- """
-     if not isinstance(file, (basestring, int)):
-         raise TypeError('invalid file: %r' % file)
-     if not isinstance(mode, basestring):
-         raise TypeError('invalid mode: %r' % mode)
-     if buffering is not None and not isinstance(buffering, int):
-         raise TypeError('invalid buffering: %r' % buffering)
-     if encoding is not None and not isinstance(encoding, basestring):
-         raise TypeError('invalid encoding: %r' % encoding)
-     if errors is not None and not isinstance(errors, basestring):
-         raise TypeError('invalid errors: %r' % errors)
-     modes = set(mode)
-     if modes - set('arwb+tU') or len(mode) > len(modes):
-         raise ValueError('invalid mode: %r' % mode)
-     reading = 'r' in modes
-     writing = 'w' in modes
-     appending = 'a' in modes
-     updating = '+' in modes
-     text = 't' in modes
-     binary = 'b' in modes
-     if 'U' in modes:
-         if writing or appending:
-             raise ValueError("can't use U and writing mode at once")
-         reading = True
-     if text and binary:
-         raise ValueError("can't have text and binary mode at once")
-     if reading + writing + appending > 1:
-         raise ValueError("can't have read/write/append mode at once")
-     if not (reading or writing or appending):
-         raise ValueError('must have exactly one of read/write/append mode')
-     if binary and encoding is not None:
-         raise ValueError("binary mode doesn't take an encoding argument")
-     if binary and errors is not None:
-         raise ValueError("binary mode doesn't take an errors argument")
-     if binary and newline is not None:
-         raise ValueError("binary mode doesn't take a newline argument")
-     raw = FileIO(file,
-                  (reading and 'r' or '') +
-                  (writing and 'w' or '') +
-                  (appending and 'a' or '') +
-                  (updating and '+' or ''),
-                  closefd)
- if buffering is None:
- buffering = -1
-
- line_buffering = False
- if (buffering == 1 or buffering < 0) and raw.isatty():
- buffering = -1
- line_buffering = True
-
-     if buffering < 0:
-         buffering = DEFAULT_BUFFER_SIZE
-         try:
-             bs = os.fstat(raw.fileno()).st_blksize
-         except (os.error, AttributeError):
-             pass
-         else:
-             if bs > 1:
-                 buffering = bs
-     if buffering < 0:
-         raise ValueError('invalid buffering size')
- if buffering == 0:
- if binary:
- return raw
- raise ValueError("can't have unbuffered text I/O")
- if updating:
- buffer = BufferedRandom(raw, buffering)
- elif writing or appending:
- buffer = BufferedWriter(raw, buffering)
- elif reading:
- buffer = BufferedReader(raw, buffering)
- else:
- raise ValueError('unknown mode: %r' % mode)
- if appending:
- return buffer
- text = TextIOWrapper(buffer, encoding, errors, newline, line_buffering)
- text.mode = mode
- return text
-
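- # Editor's illustrative sketch (not part of the original module): how the mode
- # string passed to open() selects the class that is returned, as described in
- # the docstring above.  The file name is hypothetical.
- def _example_open_modes(path='example.txt'):
-     with open(path, 'w', encoding='utf-8') as f:    # text mode -> TextIOWrapper
-         f.write('spam\n')
-     with open(path, 'rb') as f:                     # binary read -> BufferedReader
-         data = f.read()
-     with open(path, 'r+b') as f:                    # binary update -> BufferedRandom
-         f.seek(0, 2)                                # jump to end of file
-     return data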
-
- class _DocDescriptor:
- '''Helper for builtins.open.__doc__
- '''
-
- def __get__(self, obj, typ):
- return "open(file, mode='r', buffering=None, encoding=None, errors=None, newline=None, closefd=True)\n\n" + open.__doc__
-
-
-
- class OpenWrapper:
- """Wrapper for builtins.open
-
- Trick so that open won't become a bound method when stored
- as a class variable (as dumbdbm does).
-
- See initstdio() in Python/pythonrun.c.
- """
- __doc__ = _DocDescriptor()
-
- def __new__(cls, *args, **kwargs):
- return open(*args, **kwargs)
-
-
-
- class UnsupportedOperation(ValueError, IOError):
- pass
-
-
- class IOBase(object):
- """The abstract base class for all I/O classes, acting on streams of
- bytes. There is no public constructor.
-
- This class provides dummy implementations for many methods that
- derived classes can override selectively; the default implementations
- represent a file that cannot be read, written or seeked.
-
- Even though IOBase does not declare read, readinto, or write because
- their signatures will vary, implementations and clients should
- consider those methods part of the interface. Also, implementations
-     may raise an IOError when operations they do not support are called.
-
- The basic type used for binary data read from or written to a file is
- bytes. bytearrays are accepted too, and in some cases (such as
- readinto) needed. Text I/O classes work with str data.
-
- Note that calling any method (even inquiries) on a closed stream is
- undefined. Implementations may raise IOError in this case.
-
- IOBase (and its subclasses) support the iterator protocol, meaning
- that an IOBase object can be iterated over yielding the lines in a
- stream.
-
-     IOBase also supports the :keyword:`with` statement. In this example,
-     fp is closed after the suite of the with statement is complete:
-
-     with open('spam.txt', 'w') as fp:
-         fp.write('Spam and eggs!')
- """
- __metaclass__ = abc.ABCMeta
-
- def _unsupported(self, name):
- '''Internal: raise an exception for unsupported operations.'''
- raise UnsupportedOperation('%s.%s() not supported' % (self.__class__.__name__, name))
-
-
- def seek(self, pos, whence = 0):
- '''Change stream position.
-
- Change the stream position to byte offset offset. offset is
- interpreted relative to the position indicated by whence. Values
- for whence are:
-
- * 0 -- start of stream (the default); offset should be zero or positive
- * 1 -- current stream position; offset may be negative
- * 2 -- end of stream; offset is usually negative
-
- Return the new absolute position.
- '''
- self._unsupported('seek')
-
-
- def tell(self):
- '''Return current stream position.'''
- return self.seek(0, 1)
-
-
- def truncate(self, pos = None):
- '''Truncate file to size bytes.
-
- Size defaults to the current IO position as reported by tell(). Return
- the new size.
- '''
- self._unsupported('truncate')
-
-
- def flush(self):
- '''Flush write buffers, if applicable.
-
- This is not implemented for read-only and non-blocking streams.
- '''
- pass
-
- __closed = False
-
- def close(self):
- '''Flush and close the IO object.
-
- This method has no effect if the file is already closed.
- '''
-         if not self.__closed:
-             try:
-                 self.flush()
-             except IOError:
-                 pass
-             self.__closed = True
-
-
-
- def __del__(self):
- '''Destructor. Calls close().'''
-
- try:
- self.close()
- except:
- pass
-
-
-
- def seekable(self):
- '''Return whether object supports random access.
-
- If False, seek(), tell() and truncate() will raise IOError.
- This method may need to do a test seek().
- '''
- return False
-
-
- def _checkSeekable(self, msg = None):
- '''Internal: raise an IOError if file is not seekable
- '''
-         if not self.seekable():
-             raise IOError('File or stream is not seekable.'
-                           if msg is None else msg)
-
-
- def readable(self):
- '''Return whether object was opened for reading.
-
- If False, read() will raise IOError.
- '''
- return False
-
-
- def _checkReadable(self, msg = None):
- '''Internal: raise an IOError if file is not readable
- '''
-         if not self.readable():
-             raise IOError('File or stream is not readable.'
-                           if msg is None else msg)
-
-
- def writable(self):
- '''Return whether object was opened for writing.
-
- If False, write() and truncate() will raise IOError.
- '''
- return False
-
-
- def _checkWritable(self, msg = None):
- '''Internal: raise an IOError if file is not writable
- '''
-         if not self.writable():
-             raise IOError('File or stream is not writable.'
-                           if msg is None else msg)
-
-
- def closed(self):
- '''closed: bool. True iff the file has been closed.
-
- For backwards compatibility, this is a property, not a predicate.
- '''
-         return self.__closed
-
- closed = property(closed)
-
- def _checkClosed(self, msg = None):
-         '''Internal: raise a ValueError if file is closed
- '''
-         if self.closed:
-             raise ValueError('I/O operation on closed file.'
-                              if msg is None else msg)
-
-
- def __enter__(self):
- '''Context management protocol. Returns self.'''
- self._checkClosed()
- return self
-
-
- def __exit__(self, *args):
- '''Context management protocol. Calls close()'''
- self.close()
-
-
- def fileno(self):
- '''Returns underlying file descriptor if one exists.
-
- An IOError is raised if the IO object does not use a file descriptor.
- '''
- self._unsupported('fileno')
-
-
- def isatty(self):
- """Return whether this is an 'interactive' stream.
-
- Return False if it can't be determined.
- """
- self._checkClosed()
- return False
-
-
- def readline(self, limit = -1):
- """Read and return a line from the stream.
-
- If limit is specified, at most limit bytes will be read.
-
- The line terminator is always b'\\n' for binary files; for text
- files, the newlines argument to open can be used to select the line
- terminator(s) recognized.
- """
- self._checkClosed()
- if hasattr(self, 'peek'):
-
- def nreadahead():
- readahead = self.peek(1)
- if not readahead:
- return 1
-                 n = (readahead.find(b'\n') + 1) or len(readahead)
- if limit >= 0:
- n = min(n, limit)
-
- return n
-
- else:
-
- def nreadahead():
- return 1
-
- if limit is None:
- limit = -1
-
- if not isinstance(limit, (int, long)):
- raise TypeError('limit must be an integer')
- res = bytearray()
- while limit < 0 or len(res) < limit:
- b = self.read(nreadahead())
- if not b:
- break
-
- res += b
- if res.endswith(b'\n'):
- break
- return bytes(res)
-
-
- def __iter__(self):
- self._checkClosed()
- return self
-
-
- def next(self):
- line = self.readline()
- if not line:
- raise StopIteration
- return line
-
-
- def readlines(self, hint = None):
- '''Return a list of lines from the stream.
-
- hint can be specified to control the number of lines read: no more
- lines will be read if the total size (in bytes/characters) of all
- lines so far exceeds hint.
- '''
- if hint is None:
- hint = -1
-
- if not isinstance(hint, (int, long)):
- raise TypeError('hint must be an integer')
- if hint <= 0:
- return list(self)
- n = 0
- lines = []
- for line in self:
- lines.append(line)
- n += len(line)
- if n >= hint:
- break
-
- return lines
-
-
- def writelines(self, lines):
- self._checkClosed()
- for line in lines:
- self.write(line)
-
-
-
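- # Editor's illustrative sketch (not part of the original module): the iterator
- # protocol and the readlines() hint described in IOBase above.  The stream
- # argument is assumed to be any readable IOBase subclass instance.
- def _example_iterate_lines(stream, hint=1024):
-     first = stream.readlines(hint)      # stop once ~hint bytes/chars were read
-     rest = [line for line in stream]    # IOBase objects iterate over lines
-     return first, rest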
-
- class RawIOBase(IOBase):
- '''Base class for raw binary I/O.'''
-
- def read(self, n = -1):
- '''Read and return up to n bytes.
-
- Returns an empty bytes array on EOF, or None if the object is
- set not to block and has no data to read.
- '''
- if n is None:
- n = -1
-
- if n < 0:
- return self.readall()
- b = bytearray(n.__index__())
- n = self.readinto(b)
- del b[n:]
- return bytes(b)
-
-
- def readall(self):
-         '''Read until EOF, using multiple read() calls.'''
- res = bytearray()
- while True:
- data = self.read(DEFAULT_BUFFER_SIZE)
- if not data:
- break
-
- res += data
- return bytes(res)
-
-
- def readinto(self, b):
- '''Read up to len(b) bytes into b.
-
- Returns number of bytes read (0 for EOF), or None if the object
-         is set not to block and has no data to read.
- '''
- self._unsupported('readinto')
-
-
- def write(self, b):
- '''Write the given buffer to the IO stream.
-
- Returns the number of bytes written, which may be less than len(b).
- '''
- self._unsupported('write')
-
-
-
- class FileIO(_fileio._FileIO, RawIOBase):
- '''Raw I/O implementation for OS files.'''
-
- def __init__(self, name, mode = 'r', closefd = True):
- _fileio._FileIO.__init__(self, name, mode, closefd)
- self._name = name
-
-
- def close(self):
- _fileio._FileIO.close(self)
- RawIOBase.close(self)
-
-
- def name(self):
- return self._name
-
- name = property(name)
-
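- # Editor's illustrative sketch (not part of the original module): FileIO is the
- # raw, unbuffered layer, so a single read() maps to one OS read and may return
- # fewer bytes than requested.  The file name is hypothetical.
- def _example_raw_read(path='example.bin'):
-     raw = FileIO(path, 'r')
-     try:
-         return raw.read(4096)    # at most one OS-level read; may be short
-     finally:
-         raw.close()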
-
- class BufferedIOBase(IOBase):
- '''Base class for buffered IO objects.
-
- The main difference with RawIOBase is that the read() method
- supports omitting the size argument, and does not have a default
- implementation that defers to readinto().
-
- In addition, read(), readinto() and write() may raise
- BlockingIOError if the underlying raw stream is in non-blocking
- mode and not ready; unlike their raw counterparts, they will never
- return None.
-
- A typical implementation should not inherit from a RawIOBase
- implementation, but wrap one.
- '''
-
- def read(self, n = None):
- """Read and return up to n bytes.
-
- If the argument is omitted, None, or negative, reads and
- returns all data until EOF.
-
- If the argument is positive, and the underlying raw stream is
- not 'interactive', multiple raw reads may be issued to satisfy
- the byte count (unless EOF is reached first). But for
- interactive raw streams (XXX and for pipes?), at most one raw
- read will be issued, and a short result does not imply that
- EOF is imminent.
-
- Returns an empty bytes array on EOF.
-
- Raises BlockingIOError if the underlying raw stream has no
- data at the moment.
- """
- self._unsupported('read')
-
-
- def readinto(self, b):
- """Read up to len(b) bytes into b.
-
- Like read(), this may issue multiple reads to the underlying raw
- stream, unless the latter is 'interactive'.
-
- Returns the number of bytes read (0 for EOF).
-
- Raises BlockingIOError if the underlying raw stream has no
- data at the moment.
- """
- data = self.read(len(b))
- n = len(data)
-
- try:
- b[:n] = data
-         except TypeError as err:
-             import array
-             if not isinstance(b, array.array):
-                 raise err
-             b[:n] = array.array(b'b', data)
-
- return n
-
-
- def write(self, b):
- '''Write the given buffer to the IO stream.
-
- Return the number of bytes written, which is never less than
- len(b).
-
- Raises BlockingIOError if the buffer is full and the
- underlying raw stream cannot accept more data at the moment.
- '''
- self._unsupported('write')
-
-
-
- class _BufferedIOMixin(BufferedIOBase):
- '''A mixin implementation of BufferedIOBase with an underlying raw stream.
-
- This passes most requests on to the underlying raw stream. It
- does *not* provide implementations of read(), readinto() or
- write().
- '''
-
- def __init__(self, raw):
- self.raw = raw
-
-
- def seek(self, pos, whence = 0):
- return self.raw.seek(pos, whence)
-
-
- def tell(self):
- return self.raw.tell()
-
-
- def truncate(self, pos = None):
- self.flush()
- if pos is None:
- pos = self.tell()
-
- return self.raw.truncate(pos)
-
-
- def flush(self):
- self.raw.flush()
-
-
- def close(self):
- if not self.closed:
-
- try:
- self.flush()
- except IOError:
- pass
-
- self.raw.close()
-
-
-
- def seekable(self):
- return self.raw.seekable()
-
-
- def readable(self):
- return self.raw.readable()
-
-
- def writable(self):
- return self.raw.writable()
-
-
- def closed(self):
- return self.raw.closed
-
- closed = property(closed)
-
- def name(self):
- return self.raw.name
-
- name = property(name)
-
- def mode(self):
- return self.raw.mode
-
- mode = property(mode)
-
- def fileno(self):
- return self.raw.fileno()
-
-
- def isatty(self):
- return self.raw.isatty()
-
-
-
- class _BytesIO(BufferedIOBase):
- '''Buffered I/O implementation using an in-memory bytes buffer.'''
-
- def __init__(self, initial_bytes = None):
- buf = bytearray()
- if initial_bytes is not None:
- buf += bytearray(initial_bytes)
-
- self._buffer = buf
- self._pos = 0
-
-
- def getvalue(self):
- '''Return the bytes value (contents) of the buffer
- '''
- if self.closed:
- raise ValueError('getvalue on closed file')
- return bytes(self._buffer)
-
-
- def read(self, n = None):
- if self.closed:
- raise ValueError('read from closed file')
- if n is None:
- n = -1
-
- if not isinstance(n, (int, long)):
- raise TypeError('argument must be an integer')
- if n < 0:
- n = len(self._buffer)
-
- if len(self._buffer) <= self._pos:
- return b''
- newpos = min(len(self._buffer), self._pos + n)
- b = self._buffer[self._pos:newpos]
- self._pos = newpos
- return bytes(b)
-
-
- def read1(self, n):
- '''this is the same as read.
- '''
- return self.read(n)
-
-
- def write(self, b):
- if self.closed:
- raise ValueError('write to closed file')
- if isinstance(b, unicode):
- raise TypeError("can't write unicode to binary stream")
- n = len(b)
- if n == 0:
- return 0
- pos = self._pos
- self._buffer[pos:pos + n] = b
- self._pos += n
- return n
-
-
- def seek(self, pos, whence = 0):
- if self.closed:
- raise ValueError('seek on closed file')
-
- try:
- pos = pos.__index__()
- except AttributeError:
- raise TypeError('an integer is required')
-
- if whence == 0:
- if pos < 0:
- raise ValueError('negative seek position %r' % (pos,))
- self._pos = pos
- elif whence == 1:
- self._pos = max(0, self._pos + pos)
- elif whence == 2:
- self._pos = max(0, len(self._buffer) + pos)
- else:
- raise ValueError('invalid whence value')
-         return self._pos
-
-
- def tell(self):
- if self.closed:
- raise ValueError('tell on closed file')
- return self._pos
-
-
- def truncate(self, pos = None):
- if self.closed:
- raise ValueError('truncate on closed file')
- if pos is None:
- pos = self._pos
- elif pos < 0:
- raise ValueError('negative truncate position %r' % (pos,))
-
- del self._buffer[pos:]
- return self.seek(pos)
-
-
- def readable(self):
- return True
-
-
- def writable(self):
- return True
-
-
- def seekable(self):
- return True
-
-
-
- try:
- import _bytesio
-
- class BytesIO(_bytesio._BytesIO, BufferedIOBase):
- __doc__ = _bytesio._BytesIO.__doc__
-
- except ImportError:
- BytesIO = _BytesIO
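-
- # Editor's illustrative sketch (not part of the original module): BytesIO keeps
- # the whole stream in memory, so the usual read/write/seek calls need no
- # backing file and getvalue() exposes the complete contents.
- def _example_bytesio():
-     buf = BytesIO(b'spam')
-     buf.seek(0, 2)               # seek to the end of the buffer
-     buf.write(b' and eggs')
-     buf.seek(0)
-     data = buf.read()            # b'spam and eggs'
-     return data, buf.getvalue()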
-
-
- class BufferedReader(_BufferedIOMixin):
- '''BufferedReader(raw[, buffer_size])
-
-     A buffer for a readable, sequential RawIOBase object.
-
- The constructor creates a BufferedReader for the given readable raw
- stream and buffer_size. If buffer_size is omitted, DEFAULT_BUFFER_SIZE
- is used.
- '''
-
- def __init__(self, raw, buffer_size = DEFAULT_BUFFER_SIZE):
- '''Create a new buffered reader using the given readable raw IO object.
- '''
- raw._checkReadable()
- _BufferedIOMixin.__init__(self, raw)
- self.buffer_size = buffer_size
- self._reset_read_buf()
- self._read_lock = threading.Lock()
-
-
- def _reset_read_buf(self):
- self._read_buf = b''
- self._read_pos = 0
-
-
- def read(self, n = None):
- '''Read n bytes.
-
- Returns exactly n bytes of data unless the underlying raw IO
- stream reaches EOF or if the call would block in non-blocking
- mode. If n is negative, read until EOF or until read() would
- block.
- '''
-         with self._read_lock:
-             return self._read_unlocked(n)
-
-
-
- def _read_unlocked(self, n = None):
- nodata_val = b''
- empty_values = (b'', None)
- buf = self._read_buf
- pos = self._read_pos
- if n is None or n == -1:
- self._reset_read_buf()
- chunks = [
- buf[pos:]]
- current_size = 0
- while True:
- chunk = self.raw.read()
- if chunk in empty_values:
- nodata_val = chunk
- break
-
- current_size += len(chunk)
- chunks.append(chunk)
-             return b''.join(chunks) or nodata_val
- avail = len(buf) - pos
- if n <= avail:
- self._read_pos += n
- return buf[pos:pos + n]
- chunks = [
- buf[pos:]]
- wanted = max(self.buffer_size, n)
-         while avail < n:
-             chunk = self.raw.read(wanted)
-             if chunk in empty_values:
-                 nodata_val = chunk
-                 break
-             avail += len(chunk)
-             chunks.append(chunk)
-         n = min(n, avail)
- out = b''.join(chunks)
- self._read_buf = out[n:]
- self._read_pos = 0
- if out:
- return out[:n]
- return nodata_val
-
-
- def peek(self, n = 0):
- '''Returns buffered bytes without advancing the position.
-
- The argument indicates a desired minimal number of bytes; we
- do at most one raw read to satisfy it. We never return more
- than self.buffer_size.
- '''
-         with self._read_lock:
-             return self._peek_unlocked(n)
-
-
-
- def _peek_unlocked(self, n = 0):
- want = min(n, self.buffer_size)
- have = len(self._read_buf) - self._read_pos
- if have < want:
- to_read = self.buffer_size - have
- current = self.raw.read(to_read)
- if current:
- self._read_buf = self._read_buf[self._read_pos:] + current
- self._read_pos = 0
-
-
- return self._read_buf[self._read_pos:]
-
-
- def read1(self, n):
- '''Reads up to n bytes, with at most one read() system call.'''
- if n <= 0:
- return b''
-         with self._read_lock:
-             self._peek_unlocked(1)
-             return self._read_unlocked(min(n, len(self._read_buf) - self._read_pos))
-
-
-
- def tell(self):
- return (self.raw.tell() - len(self._read_buf)) + self._read_pos
-
-
- def seek(self, pos, whence = 0):
-         with self._read_lock:
-             pos = self.raw.seek(pos, whence)
-             self._reset_read_buf()
-             return pos
-
-
-
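- # Editor's illustrative sketch (not part of the original module): peek() looks
- # at buffered bytes without consuming them and read1() issues at most one raw
- # read, which matters for pipes and sockets where extra reads may block.  The
- # reader argument is assumed to be a BufferedReader instance.
- def _example_peek_then_read(reader):
-     head = reader.peek(2)                # does not advance the position
-     if head.startswith(b'\x1f\x8b'):     # e.g. check for a gzip magic number
-         return reader.read()             # read everything
-     return reader.read1(DEFAULT_BUFFER_SIZE)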
-
- class BufferedWriter(_BufferedIOMixin):
- '''A buffer for a writeable sequential RawIO object.
-
- The constructor creates a BufferedWriter for the given writeable raw
- stream. If the buffer_size is not given, it defaults to
-     DEFAULT_BUFFER_SIZE. If max_buffer_size is omitted, it defaults to
- twice the buffer size.
- '''
-
- def __init__(self, raw, buffer_size = DEFAULT_BUFFER_SIZE, max_buffer_size = None):
- raw._checkWritable()
- _BufferedIOMixin.__init__(self, raw)
- self.buffer_size = buffer_size
-         self.max_buffer_size = 2 * buffer_size if max_buffer_size is None else max_buffer_size
- self._write_buf = bytearray()
- self._write_lock = threading.Lock()
-
-
- def write(self, b):
-         if self.closed:
-             raise ValueError('write to closed file')
-         if isinstance(b, unicode):
-             raise TypeError("can't write unicode to binary stream")
-         with self._write_lock:
-             if len(self._write_buf) > self.buffer_size:
-                 # We're full, so let's pre-flush the buffer
-                 try:
-                     self._flush_unlocked()
-                 except BlockingIOError as e:
-                     # We can't accept anything else.
-                     raise BlockingIOError(e.errno, e.strerror, 0)
-             before = len(self._write_buf)
-             self._write_buf.extend(b)
-             written = len(self._write_buf) - before
-             if len(self._write_buf) > self.buffer_size:
-                 try:
-                     self._flush_unlocked()
-                 except BlockingIOError as e:
-                     if len(self._write_buf) > self.max_buffer_size:
-                         # We've hit max_buffer_size, so accept a partial
-                         # write and cut back the buffer.
-                         overage = len(self._write_buf) - self.max_buffer_size
-                         self._write_buf = self._write_buf[:self.max_buffer_size]
-                         raise BlockingIOError(e.errno, e.strerror, overage)
-             return written
-
-
-
- def truncate(self, pos = None):
-         with self._write_lock:
-             self._flush_unlocked()
-             return self.raw.truncate(pos)
-
-
-
- def flush(self):
-         with self._write_lock:
-             self._flush_unlocked()
-
-
-
- def _flush_unlocked(self):
- if self.closed:
- raise ValueError('flush of closed file')
- written = 0
-
- try:
- while self._write_buf:
- n = self.raw.write(self._write_buf)
- del self._write_buf[:n]
- written += n
-         except BlockingIOError as e:
- n = e.characters_written
- del self._write_buf[:n]
- written += n
- raise BlockingIOError(e.errno, e.strerror, written)
-
-
-
- def tell(self):
- return self.raw.tell() + len(self._write_buf)
-
-
- def seek(self, pos, whence = 0):
-         with self._write_lock:
-             self._flush_unlocked()
-             return self.raw.seek(pos, whence)
-
-
-
-
- class BufferedRWPair(BufferedIOBase):
- '''A buffered reader and writer object together.
-
- A buffered reader object and buffered writer object put together to
- form a sequential IO object that can read and write. This is typically
- used with a socket or two-way pipe.
-
- reader and writer are RawIOBase objects that are readable and
- writeable respectively. If the buffer_size is omitted it defaults to
- DEFAULT_BUFFER_SIZE. The max_buffer_size (for the buffered writer)
- defaults to twice the buffer size.
- '''
-
- def __init__(self, reader, writer, buffer_size = DEFAULT_BUFFER_SIZE, max_buffer_size = None):
- '''Constructor.
-
- The arguments are two RawIO instances.
- '''
- reader._checkReadable()
- writer._checkWritable()
- self.reader = BufferedReader(reader, buffer_size)
- self.writer = BufferedWriter(writer, buffer_size, max_buffer_size)
-
-
- def read(self, n = None):
- if n is None:
- n = -1
-
- return self.reader.read(n)
-
-
- def readinto(self, b):
- return self.reader.readinto(b)
-
-
- def write(self, b):
- return self.writer.write(b)
-
-
- def peek(self, n = 0):
- return self.reader.peek(n)
-
-
- def read1(self, n):
- return self.reader.read1(n)
-
-
- def readable(self):
- return self.reader.readable()
-
-
- def writable(self):
- return self.writer.writable()
-
-
- def flush(self):
- return self.writer.flush()
-
-
- def close(self):
- self.writer.close()
- self.reader.close()
-
-
- def isatty(self):
-         return self.reader.isatty() or self.writer.isatty()
-
-
- def closed(self):
- return self.writer.closed
-
- closed = property(closed)
-
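- # Editor's illustrative sketch (not part of the original module): BufferedRWPair
- # glues one readable and one writable raw stream into a single object, e.g. the
- # two file descriptors of a pipe pair.  The descriptor arguments are assumptions.
- def _example_rw_pair(read_fd, write_fd):
-     pair = BufferedRWPair(FileIO(read_fd, 'r'), FileIO(write_fd, 'w'))
-     pair.write(b'ping\n')
-     pair.flush()
-     return pair.readline()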
-
- class BufferedRandom(BufferedWriter, BufferedReader):
- '''A buffered interface to random access streams.
-
- The constructor creates a reader and writer for a seekable stream,
- raw, given in the first argument. If the buffer_size is omitted it
- defaults to DEFAULT_BUFFER_SIZE. The max_buffer_size (for the buffered
- writer) defaults to twice the buffer size.
- '''
-
- def __init__(self, raw, buffer_size = DEFAULT_BUFFER_SIZE, max_buffer_size = None):
- raw._checkSeekable()
- BufferedReader.__init__(self, raw, buffer_size)
- BufferedWriter.__init__(self, raw, buffer_size, max_buffer_size)
-
-
- def seek(self, pos, whence = 0):
- self.flush()
- pos = self.raw.seek(pos, whence)
-         with self._read_lock:
-             self._reset_read_buf()
-
- return pos
-
-
- def tell(self):
- if self._write_buf:
- return self.raw.tell() + len(self._write_buf)
- return BufferedReader.tell(self)
-
-
- def truncate(self, pos = None):
- if pos is None:
- pos = self.tell()
-
- self.seek(pos)
- return BufferedWriter.truncate(self)
-
-
- def read(self, n = None):
- if n is None:
- n = -1
-
- self.flush()
- return BufferedReader.read(self, n)
-
-
- def readinto(self, b):
- self.flush()
- return BufferedReader.readinto(self, b)
-
-
- def peek(self, n = 0):
- self.flush()
- return BufferedReader.peek(self, n)
-
-
- def read1(self, n):
- self.flush()
- return BufferedReader.read1(self, n)
-
-
- def write(self, b):
- return BufferedWriter.write(self, b)
-
-
-
- class TextIOBase(IOBase):
- """Base class for text I/O.
-
- This class provides a character and line based interface to stream
- I/O. There is no readinto method because Python's character strings
- are immutable. There is no public constructor.
- """
-
- def read(self, n = -1):
- '''Read at most n characters from stream.
-
- Read from underlying buffer until we have n characters or we hit EOF.
- If n is negative or omitted, read until EOF.
- '''
- self._unsupported('read')
-
-
- def write(self, s):
- '''Write string s to stream.'''
- self._unsupported('write')
-
-
- def truncate(self, pos = None):
- '''Truncate size to pos.'''
- self._unsupported('truncate')
-
-
- def readline(self):
- '''Read until newline or EOF.
-
- Returns an empty string if EOF is hit immediately.
- '''
- self._unsupported('readline')
-
-
- def encoding(self):
- '''Subclasses should override.'''
- pass
-
- encoding = property(encoding)
-
- def newlines(self):
- '''Line endings translated so far.
-
- Only line endings translated during reading are considered.
-
- Subclasses should override.
- '''
- pass
-
- newlines = property(newlines)
-
-
- class IncrementalNewlineDecoder(codecs.IncrementalDecoder):
- '''Codec used when reading a file in universal newlines mode.
- It wraps another incremental decoder, translating \\r\\n and \\r into \\n.
- It also records the types of newlines encountered.
- When used with translate=False, it ensures that the newline sequence is
- returned in one piece.
- '''
-
- def __init__(self, decoder, translate, errors = 'strict'):
- codecs.IncrementalDecoder.__init__(self, errors = errors)
- self.translate = translate
- self.decoder = decoder
- self.seennl = 0
- self.pendingcr = False
-
-
- def decode(self, input, final = False):
- output = self.decoder.decode(input, final = final)
- if self.pendingcr:
- if output or final:
- output = '\r' + output
- self.pendingcr = False
-
- if output.endswith('\r') and not final:
- output = output[:-1]
- self.pendingcr = True
-
- crlf = output.count('\r\n')
- cr = output.count('\r') - crlf
- lf = output.count('\n') - crlf
-         self.seennl |= (crlf and self._CRLF) | (cr and self._CR) | (lf and self._LF)
- if self.translate:
- if crlf:
- output = output.replace('\r\n', '\n')
-
- if cr:
- output = output.replace('\r', '\n')
-
-
- return output
-
-
- def getstate(self):
- (buf, flag) = self.decoder.getstate()
- flag <<= 1
- if self.pendingcr:
- flag |= 1
-
- return (buf, flag)
-
-
- def setstate(self, state):
- (buf, flag) = state
- self.pendingcr = bool(flag & 1)
- self.decoder.setstate((buf, flag >> 1))
-
-
- def reset(self):
- self.seennl = 0
- self.pendingcr = False
- self.decoder.reset()
-
- _LF = 1
- _CR = 2
- _CRLF = 4
-
- def newlines(self):
- return (None, '\n', '\r', ('\r', '\n'), '\r\n', ('\n', '\r\n'), ('\r', '\r\n'), ('\r', '\n', '\r\n'))[self.seennl]
-
- newlines = property(newlines)
-
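- # Editor's illustrative sketch (not part of the original module): with
- # translate=True the decoder folds '\r\n' and '\r' into '\n' and records what
- # it saw in the newlines property.
- def _example_newline_decoding():
-     decoder = IncrementalNewlineDecoder(codecs.getincrementaldecoder('utf-8')(),
-                                         translate=True)
-     text = decoder.decode(b'spam\r\neggs\r', final=True)
-     return text, decoder.newlines     # ('spam\neggs\n', ('\r', '\r\n'))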
-
- class TextIOWrapper(TextIOBase):
- '''Character and line based layer over a BufferedIOBase object, buffer.
-
- encoding gives the name of the encoding that the stream will be
- decoded or encoded with. It defaults to locale.getpreferredencoding.
-
- errors determines the strictness of encoding and decoding (see the
- codecs.register) and defaults to "strict".
-
- newline can be None, \'\', \'\\n\', \'\\r\', or \'\\r\\n\'. It controls the
- handling of line endings. If it is None, universal newlines is
- enabled. With this enabled, on input, the lines endings \'\\n\', \'\\r\',
- or \'\\r\\n\' are translated to \'\\n\' before being returned to the
- caller. Conversely, on output, \'\\n\' is translated to the system
- default line separator, os.linesep. If newline is any other of its
- legal values, that newline becomes the newline when the file is read
- and it is returned untranslated. On output, \'\\n\' is converted to the
- newline.
-
- If line_buffering is True, a call to flush is implied when a call to
- write contains a newline character.
- '''
- _CHUNK_SIZE = 128
-
- def __init__(self, buffer, encoding = None, errors = None, newline = None, line_buffering = False):
- if newline not in (None, '', '\n', '\r', '\r\n'):
- raise ValueError('illegal newline value: %r' % (newline,))
- if encoding is None:
-
- try:
- encoding = os.device_encoding(buffer.fileno())
- except (AttributeError, UnsupportedOperation):
- pass
-
-             if encoding is None:
-                 try:
-                     import locale
-                 except ImportError:
-                     # Importing locale may fail if Python is being built
-                     encoding = 'ascii'
-                 else:
-                     encoding = locale.getpreferredencoding()
-
- if not isinstance(encoding, basestring):
- raise ValueError('invalid encoding: %r' % encoding)
- if errors is None:
- errors = 'strict'
- elif not isinstance(errors, basestring):
- raise ValueError('invalid errors: %r' % errors)
-
- self.buffer = buffer
- self._line_buffering = line_buffering
- self._encoding = encoding
- self._errors = errors
- self._readuniversal = not newline
- self._readtranslate = newline is None
- self._readnl = newline
-         self._writetranslate = newline != ''
-         self._writenl = newline or os.linesep
- self._encoder = None
- self._decoder = None
- self._decoded_chars = ''
- self._decoded_chars_used = 0
- self._snapshot = None
- self._seekable = self._telling = self.buffer.seekable()
-
-
- def encoding(self):
- return self._encoding
-
- encoding = property(encoding)
-
- def errors(self):
- return self._errors
-
- errors = property(errors)
-
- def line_buffering(self):
- return self._line_buffering
-
- line_buffering = property(line_buffering)
-
- def seekable(self):
- return self._seekable
-
-
- def readable(self):
- return self.buffer.readable()
-
-
- def writable(self):
- return self.buffer.writable()
-
-
- def flush(self):
- self.buffer.flush()
- self._telling = self._seekable
-
-
- def close(self):
-
- try:
- self.flush()
- except:
- pass
-
- self.buffer.close()
-
-
- def closed(self):
- return self.buffer.closed
-
- closed = property(closed)
-
- def name(self):
- return self.buffer.name
-
- name = property(name)
-
- def fileno(self):
- return self.buffer.fileno()
-
-
- def isatty(self):
- return self.buffer.isatty()
-
-
- def write(self, s):
-         if self.closed:
-             raise ValueError('write to closed file')
-         if not isinstance(s, unicode):
-             raise TypeError("can't write %s to text stream" % s.__class__.__name__)
-         length = len(s)
-         haslf = (self._writetranslate or self._line_buffering) and '\n' in s
-         if haslf and self._writetranslate and self._writenl != '\n':
-             s = s.replace('\n', self._writenl)
-         encoder = self._encoder or self._get_encoder()
- b = encoder.encode(s)
- self.buffer.write(b)
- if self._line_buffering:
- if haslf or '\r' in s:
- self.flush()
-
- self._snapshot = None
- if self._decoder:
- self._decoder.reset()
-
- return length
-
-
- def _get_encoder(self):
- make_encoder = codecs.getincrementalencoder(self._encoding)
- self._encoder = make_encoder(self._errors)
- return self._encoder
-
-
- def _get_decoder(self):
- make_decoder = codecs.getincrementaldecoder(self._encoding)
- decoder = make_decoder(self._errors)
- if self._readuniversal:
- decoder = IncrementalNewlineDecoder(decoder, self._readtranslate)
-
- self._decoder = decoder
- return decoder
-
-
- def _set_decoded_chars(self, chars):
- '''Set the _decoded_chars buffer.'''
- self._decoded_chars = chars
- self._decoded_chars_used = 0
-
-
- def _get_decoded_chars(self, n = None):
- '''Advance into the _decoded_chars buffer.'''
- offset = self._decoded_chars_used
- if n is None:
- chars = self._decoded_chars[offset:]
- else:
- chars = self._decoded_chars[offset:offset + n]
- self._decoded_chars_used += len(chars)
- return chars
-
-
- def _rewind_decoded_chars(self, n):
- '''Rewind the _decoded_chars buffer.'''
- if self._decoded_chars_used < n:
- raise AssertionError('rewind decoded_chars out of bounds')
- self._decoded_chars_used -= n
-
-
- def _read_chunk(self):
- '''
- Read and decode the next chunk of data from the BufferedReader.
-
- The return value is True unless EOF was reached. The decoded string
- is placed in self._decoded_chars (replacing its previous value).
- The entire input chunk is sent to the decoder, though some of it
- may remain buffered in the decoder, yet to be converted.
- '''
- if self._decoder is None:
- raise ValueError('no decoder')
- if self._telling:
- (dec_buffer, dec_flags) = self._decoder.getstate()
-
- input_chunk = self.buffer.read1(self._CHUNK_SIZE)
- eof = not input_chunk
- self._set_decoded_chars(self._decoder.decode(input_chunk, eof))
- if self._telling:
- self._snapshot = (dec_flags, dec_buffer + input_chunk)
-
- return not eof
-
-
- def _pack_cookie(self, position, dec_flags = 0, bytes_to_feed = 0, need_eof = 0, chars_to_skip = 0):
- return position | dec_flags << 64 | bytes_to_feed << 128 | chars_to_skip << 192 | bool(need_eof) << 256
-
-
- def _unpack_cookie(self, bigint):
- (rest, position) = divmod(bigint, 0x10000000000000000L)
- (rest, dec_flags) = divmod(rest, 0x10000000000000000L)
- (rest, bytes_to_feed) = divmod(rest, 0x10000000000000000L)
- (need_eof, chars_to_skip) = divmod(rest, 0x10000000000000000L)
- return (position, dec_flags, bytes_to_feed, need_eof, chars_to_skip)
-
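-     # Editor's note (illustrative, not from the original source): the seek
-     # cookie packs five fields into one large integer, 64 bits per field:
-     #
-     #     cookie = position
-     #            | dec_flags      << 64
-     #            | bytes_to_feed  << 128
-     #            | chars_to_skip  << 192
-     #            | bool(need_eof) << 256
-     #
-     # For example, _pack_cookie(10, dec_flags=1, bytes_to_feed=3, need_eof=0,
-     # chars_to_skip=2) == 10 + (1 << 64) + (3 << 128) + (2 << 192), and
-     # _unpack_cookie() recovers (10, 1, 3, 0, 2) via repeated divmod(2**64).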
-
- def tell(self):
-         if not self._seekable:
-             raise IOError('underlying stream is not seekable')
-         if not self._telling:
-             raise IOError('telling position disabled by next() call')
-         self.flush()
-         position = self.buffer.tell()
-         decoder = self._decoder
-         if decoder is None or self._snapshot is None:
-             if self._decoded_chars:
-                 raise AssertionError('pending decoded text')
-             return position
- (dec_flags, next_input) = self._snapshot
- position -= len(next_input)
- chars_to_skip = self._decoded_chars_used
- if chars_to_skip == 0:
- return self._pack_cookie(position, dec_flags)
- saved_state = decoder.getstate()
-
- try:
- decoder.setstate((b'', dec_flags))
- start_pos = position
- start_flags = dec_flags
- bytes_fed = 0
- chars_decoded = 0
- need_eof = 0
-             # Feed the decoder one byte at a time.  As we go, note the
-             # nearest "safe start point" before the current location (a point
-             # where the decoder has nothing buffered, so seek() can safely
-             # start from there and advance to this location).
-             for next_byte in next_input:
-                 bytes_fed += 1
-                 chars_decoded += len(decoder.decode(next_byte))
-                 (dec_buffer, dec_flags) = decoder.getstate()
-                 if not dec_buffer and chars_decoded <= chars_to_skip:
-                     # Decoder buffer is empty, so this is a safe start point.
-                     start_pos += bytes_fed
-                     chars_to_skip -= chars_decoded
-                     start_flags, bytes_fed, chars_decoded = dec_flags, 0, 0
-                 if chars_decoded >= chars_to_skip:
-                     break
-             else:
-                 # We didn't get enough decoded data; signal EOF to get more.
-                 chars_decoded += len(decoder.decode(b'', final = True))
-                 need_eof = 1
-                 if chars_decoded < chars_to_skip:
-                     raise IOError("can't reconstruct logical file position")
-             # The returned cookie corresponds to the last safe start point.
-             return self._pack_cookie(start_pos, start_flags, bytes_fed, need_eof, chars_to_skip)
- finally:
- decoder.setstate(saved_state)
-
-
-
- def truncate(self, pos = None):
- self.flush()
- if pos is None:
- pos = self.tell()
-
- self.seek(pos)
- return self.buffer.truncate()
-
-
- def seek(self, cookie, whence = 0):
-         if self.closed:
-             raise ValueError('tell on closed file')
-         if not self._seekable:
-             raise IOError('underlying stream is not seekable')
-         if whence == 1:
-             if cookie != 0:
-                 raise IOError("can't do nonzero cur-relative seeks")
-             whence = 0
-             cookie = self.tell()
-
-         if whence == 2:
-             if cookie != 0:
-                 raise IOError("can't do nonzero end-relative seeks")
- self.flush()
- position = self.buffer.seek(0, 2)
- self._set_decoded_chars('')
- self._snapshot = None
- if self._decoder:
- self._decoder.reset()
-
- return position
-         if whence != 0:
-             raise ValueError('invalid whence (%r, should be 0, 1 or 2)' % (whence,))
-         if cookie < 0:
-             raise ValueError('negative seek position %r' % (cookie,))
- self.flush()
- (start_pos, dec_flags, bytes_to_feed, need_eof, chars_to_skip) = self._unpack_cookie(cookie)
- self.buffer.seek(start_pos)
- self._set_decoded_chars('')
- self._snapshot = None
-         if self._decoder or dec_flags or chars_to_skip:
-             self._decoder = self._decoder or self._get_decoder()
-             self._decoder.setstate((b'', dec_flags))
-             self._snapshot = (dec_flags, b'')
-
- if chars_to_skip:
- input_chunk = self.buffer.read(bytes_to_feed)
- self._set_decoded_chars(self._decoder.decode(input_chunk, need_eof))
- self._snapshot = (dec_flags, input_chunk)
-             if len(self._decoded_chars) < chars_to_skip:
-                 raise IOError("can't restore logical file position")
- self._decoded_chars_used = chars_to_skip
-
- return cookie
-
-
- def read(self, n = None):
- if n is None:
- n = -1
-
-         decoder = self._decoder or self._get_decoder()
- if n < 0:
- result = self._get_decoded_chars() + decoder.decode(self.buffer.read(), final = True)
- self._set_decoded_chars('')
- self._snapshot = None
- return result
- eof = False
- result = self._get_decoded_chars(n)
-         while len(result) < n and not eof:
-             eof = not self._read_chunk()
-             result += self._get_decoded_chars(n - len(result))
-         return result
-
-
- def next(self):
- self._telling = False
- line = self.readline()
- if not line:
- self._snapshot = None
- self._telling = self._seekable
- raise StopIteration
- return line
-
-
- def readline(self, limit = None):
-         if self.closed:
-             raise ValueError('read from closed file')
-         if limit is None:
-             limit = -1
-         if not isinstance(limit, (int, long)):
-             raise TypeError('limit must be an integer')
-         # Grab all the decoded text (we will rewind any extra bits later).
-         line = self._get_decoded_chars()
-         start = 0
-         decoder = self._decoder or self._get_decoder()
- pos = None
- endpos = None
- while True:
- if self._readtranslate:
- pos = line.find('\n', start)
- if pos >= 0:
- endpos = pos + 1
- break
- else:
- start = len(line)
- elif self._readuniversal:
- nlpos = line.find('\n', start)
- crpos = line.find('\r', start)
- if crpos == -1:
- if nlpos == -1:
- start = len(line)
- else:
- endpos = nlpos + 1
- break
- elif nlpos == -1:
- endpos = crpos + 1
- break
- elif nlpos < crpos:
- endpos = nlpos + 1
- break
- elif nlpos == crpos + 1:
- endpos = crpos + 2
- break
- else:
- endpos = crpos + 1
- break
- else:
- pos = line.find(self._readnl)
- if pos >= 0:
- endpos = pos + len(self._readnl)
- break
-
- if limit >= 0 and len(line) >= limit:
- endpos = limit
- break
-
-             # No line ending seen yet - get more data
-             more_line = ''
-             while self._read_chunk():
-                 if self._decoded_chars:
-                     break
-             if self._decoded_chars:
-                 line += self._get_decoded_chars()
-             else:
-                 # end of file
-                 self._set_decoded_chars('')
-                 self._snapshot = None
-                 return line
- if limit >= 0 and endpos > limit:
- endpos = limit
-
- self._rewind_decoded_chars(len(line) - endpos)
- return line[:endpos]
-
-
- def newlines(self):
- if self._decoder:
- return self._decoder.newlines
-
- newlines = property(newlines)
-
-
- class StringIO(TextIOWrapper):
- """An in-memory stream for text. The initial_value argument sets the
- value of object. The other arguments are like those of TextIOWrapper's
- constructor.
- """
-
- def __init__(self, initial_value = '', encoding = 'utf-8', errors = 'strict', newline = '\n'):
- super(StringIO, self).__init__(BytesIO(), encoding = encoding, errors = errors, newline = newline)
- if newline is None:
- self._writetranslate = False
-
- if initial_value:
- if not isinstance(initial_value, unicode):
- initial_value = unicode(initial_value)
-
- self.write(initial_value)
- self.seek(0)
-
-
-
- def getvalue(self):
- self.flush()
- return self.buffer.getvalue().decode(self._encoding, self._errors)
-
-
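- # Editor's illustrative sketch (not part of the original module): StringIO is a
- # TextIOWrapper over an in-memory BytesIO, so text written to it can be read
- # back after a seek() or extracted whole with getvalue().
- def _example_stringio():
-     s = StringIO()
-     s.write('spam\n')
-     s.write('eggs\n')
-     value = s.getvalue()      # 'spam\neggs\n'
-     s.seek(0)
-     first = s.readline()      # 'spam\n'
-     return value, first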
-